In [2]:
import tensorflow as tf
from tensorflow.keras import layers,models
import matplotlib.pyplot as plt
In [3]:
IMAGE_SIZE=256
BATCH_SIZE=32
CHANNELS=3
EPOCHS=50
In [4]:
dataset = tf.keras.preprocessing.image_dataset_from_directory(
    r"C:\Users\Abhinav pottabathini\OneDrive\Desktop\cvr\project\potato_leaf_dataset\PlantVillage",
    shuffle=True,
    image_size=(IMAGE_SIZE,IMAGE_SIZE),
    batch_size=BATCH_SIZE
)
Found 2152 files belonging to 3 classes.
In [5]:
class_names = dataset.class_names
class_names
Out[5]:
['Potato___Early_blight', 'Potato___Late_blight', 'Potato___healthy']
In [6]:
len(dataset)
Out[6]:
68
In [7]:
def get_dataset_partitions_tf(ds,train_split=0.8,test_split=0.1,val_split=0.1,shuffle=True,shuffle_size=10000):
    
    ds_size=len(ds)
    
    if shuffle:
        ds = ds.shuffle(shuffle_size,seed=10)
    
    train_size = int(train_split * ds_size)
    val_size = int(val_split * ds_size)
    
    train_ds = ds.take(train_size)
    val_ds = ds.skip(train_size).take(val_size)
    test_ds = ds.skip(train_size).skip(val_size)
    
    return train_ds,val_ds,test_ds
In [8]:
train_ds,val_ds,test_ds = get_dataset_partitions_tf(dataset)
In [9]:
len(train_ds)
Out[9]:
54
In [10]:
len(test_ds)
Out[10]:
8
In [11]:
len(val_ds)
Out[11]:
6
In [17]:
train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=tf.data.AUTOTUNE)
val_ds = val_ds.cache().shuffle(1000).prefetch(buffer_size=tf.data.AUTOTUNE)
test_ds = test_ds.cache().shuffle(1000).prefetch(buffer_size=tf.data.AUTOTUNE)
In [13]:
resize_and_rescale = tf.keras.Sequential([
    tf.keras.layers.Resizing(IMAGE_SIZE,IMAGE_SIZE),
    tf.keras.layers.Rescaling(1.0/255)
])
In [16]:
data_augmentation = tf.keras.Sequential([
    tf.keras.layers.RandomFlip("horizontal_and_vertical"),
    tf.keras.layers.RandomRotation(0.2)
])
In [24]:
input_shape= (BATCH_SIZE,IMAGE_SIZE,IMAGE_SIZE,CHANNELS)
n_classes=3
model = models.Sequential([
    resize_and_rescale,
    data_augmentation,
    layers.Conv2D(32,(3,3),activation='relu',input_shape=input_shape),
    layers.MaxPooling2D((2,2)),
    layers.Conv2D(64,(3,3),activation='relu'),
    layers.MaxPooling2D((2,2)),
    layers.Conv2D(64,(3,3),activation='relu'),
    layers.MaxPooling2D((2,2)),
    layers.Conv2D(64,(3,3),activation='relu'),
    layers.MaxPooling2D((2,2)),
    layers.Conv2D(64,(3,3),activation='relu'),
    layers.MaxPooling2D((2,2)),
    layers.Conv2D(64,(3,3),activation='relu'),
    layers.MaxPooling2D((2,2)),
    
    layers.Flatten(),
    layers.Dense(64,activation='relu'),
    layers.Dense(n_classes,activation='softmax')
])

model.build(input_shape=input_shape)
In [25]:
model.summary()
Model: "sequential_5"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━┓
┃ Layer (type)                         ┃ Output Shape                ┃         Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━┩
│ sequential (Sequential)              │ (32, 256, 256, 3)           │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ sequential_1 (Sequential)            │ (32, 256, 256, 3)           │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_7 (Conv2D)                    │ (32, 254, 254, 32)          │             896 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_7 (MaxPooling2D)       │ (32, 127, 127, 32)          │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_8 (Conv2D)                    │ (32, 125, 125, 64)          │          18,496 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_8 (MaxPooling2D)       │ (32, 62, 62, 64)            │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_9 (Conv2D)                    │ (32, 60, 60, 64)            │          36,928 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_9 (MaxPooling2D)       │ (32, 30, 30, 64)            │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_10 (Conv2D)                   │ (32, 28, 28, 64)            │          36,928 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_10 (MaxPooling2D)      │ (32, 14, 14, 64)            │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_11 (Conv2D)                   │ (32, 12, 12, 64)            │          36,928 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_11 (MaxPooling2D)      │ (32, 6, 6, 64)              │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ conv2d_12 (Conv2D)                   │ (32, 4, 4, 64)              │          36,928 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ max_pooling2d_12 (MaxPooling2D)      │ (32, 2, 2, 64)              │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ flatten_3 (Flatten)                  │ (32, 256)                   │               0 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_6 (Dense)                      │ (32, 64)                    │          16,448 │
├──────────────────────────────────────┼─────────────────────────────┼─────────────────┤
│ dense_7 (Dense)                      │ (32, 3)                     │             195 │
└──────────────────────────────────────┴─────────────────────────────┴─────────────────┘
 Total params: 183,747 (717.76 KB)
 Trainable params: 183,747 (717.76 KB)
 Non-trainable params: 0 (0.00 B)
In [26]:
model.compile(
    optimizer='adam',
    loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
    metrics=['accuracy']
)
In [28]:
history = model.fit(
    train_ds,
    epochs=EPOCHS,
    batch_size=BATCH_SIZE,
    verbose=1,
    validation_data=val_ds
)
Epoch 1/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 130s 2s/step - accuracy: 0.4373 - loss: 0.9498 - val_accuracy: 0.5990 - val_loss: 0.8106
Epoch 2/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 90s 2s/step - accuracy: 0.6483 - loss: 0.7565 - val_accuracy: 0.7135 - val_loss: 0.6387
Epoch 3/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 76s 1s/step - accuracy: 0.7259 - loss: 0.5770 - val_accuracy: 0.7656 - val_loss: 0.7527
Epoch 4/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 58s 1s/step - accuracy: 0.7655 - loss: 0.6856 - val_accuracy: 0.8438 - val_loss: 0.4255
Epoch 5/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 998ms/step - accuracy: 0.8547 - loss: 0.3646 - val_accuracy: 0.8594 - val_loss: 0.3840
Epoch 6/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 61s 1s/step - accuracy: 0.8474 - loss: 0.3752 - val_accuracy: 0.8594 - val_loss: 0.3048
Epoch 7/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 62s 1s/step - accuracy: 0.9019 - loss: 0.2540 - val_accuracy: 0.8958 - val_loss: 0.2391
Epoch 8/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 62s 1s/step - accuracy: 0.9250 - loss: 0.1863 - val_accuracy: 0.9062 - val_loss: 0.2414
Epoch 9/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 55s 1s/step - accuracy: 0.9223 - loss: 0.1918 - val_accuracy: 0.8698 - val_loss: 0.3635
Epoch 10/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 56s 1s/step - accuracy: 0.9512 - loss: 0.1320 - val_accuracy: 0.8750 - val_loss: 0.2326
Epoch 11/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 63s 1s/step - accuracy: 0.9412 - loss: 0.1322 - val_accuracy: 0.8854 - val_loss: 0.3292
Epoch 12/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 64s 1s/step - accuracy: 0.9412 - loss: 0.1616 - val_accuracy: 0.9115 - val_loss: 0.2265
Epoch 13/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 56s 1s/step - accuracy: 0.9483 - loss: 0.1391 - val_accuracy: 0.9219 - val_loss: 0.1828
Epoch 14/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 61s 1s/step - accuracy: 0.9542 - loss: 0.1023 - val_accuracy: 0.9583 - val_loss: 0.1102
Epoch 15/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 57s 1s/step - accuracy: 0.9669 - loss: 0.0848 - val_accuracy: 0.9167 - val_loss: 0.1782
Epoch 16/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 57s 1s/step - accuracy: 0.9663 - loss: 0.0940 - val_accuracy: 0.9375 - val_loss: 0.1540
Epoch 17/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 53s 971ms/step - accuracy: 0.9763 - loss: 0.0597 - val_accuracy: 0.9583 - val_loss: 0.1043
Epoch 18/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 51s 952ms/step - accuracy: 0.9758 - loss: 0.0676 - val_accuracy: 0.9635 - val_loss: 0.0976
Epoch 19/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 1s/step - accuracy: 0.9829 - loss: 0.0655 - val_accuracy: 0.9583 - val_loss: 0.1045
Epoch 20/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 1s/step - accuracy: 0.9711 - loss: 0.0931 - val_accuracy: 0.9635 - val_loss: 0.1157
Epoch 21/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 55s 1s/step - accuracy: 0.9840 - loss: 0.0560 - val_accuracy: 0.9167 - val_loss: 0.2138
Epoch 22/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 57s 1s/step - accuracy: 0.9775 - loss: 0.0618 - val_accuracy: 0.9583 - val_loss: 0.1399
Epoch 23/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 53s 986ms/step - accuracy: 0.9847 - loss: 0.0370 - val_accuracy: 0.9688 - val_loss: 0.0704
Epoch 24/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 50s 918ms/step - accuracy: 0.9877 - loss: 0.0329 - val_accuracy: 0.9323 - val_loss: 0.1723
Epoch 25/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 49s 913ms/step - accuracy: 0.9803 - loss: 0.0454 - val_accuracy: 0.9531 - val_loss: 0.0924
Epoch 26/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 51s 952ms/step - accuracy: 0.9881 - loss: 0.0392 - val_accuracy: 0.9792 - val_loss: 0.0480
Epoch 27/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 993ms/step - accuracy: 0.9864 - loss: 0.0373 - val_accuracy: 0.9792 - val_loss: 0.0807
Epoch 28/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 59s 1s/step - accuracy: 0.9982 - loss: 0.0141 - val_accuracy: 0.9740 - val_loss: 0.0673
Epoch 29/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 50s 925ms/step - accuracy: 0.9960 - loss: 0.0149 - val_accuracy: 0.9792 - val_loss: 0.0605
Epoch 30/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 49s 910ms/step - accuracy: 0.9802 - loss: 0.0458 - val_accuracy: 0.9896 - val_loss: 0.0539
Epoch 31/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 52s 959ms/step - accuracy: 0.9626 - loss: 0.1111 - val_accuracy: 0.9948 - val_loss: 0.0318
Epoch 32/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 996ms/step - accuracy: 0.9823 - loss: 0.0454 - val_accuracy: 0.9896 - val_loss: 0.0383
Epoch 33/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 64s 1s/step - accuracy: 0.9806 - loss: 0.0403 - val_accuracy: 0.9323 - val_loss: 0.1873
Epoch 34/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 52s 968ms/step - accuracy: 0.9815 - loss: 0.0537 - val_accuracy: 0.9792 - val_loss: 0.0686
Epoch 35/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 51s 946ms/step - accuracy: 0.9878 - loss: 0.0310 - val_accuracy: 0.9583 - val_loss: 0.0914
Epoch 36/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 50s 930ms/step - accuracy: 0.9921 - loss: 0.0255 - val_accuracy: 0.9792 - val_loss: 0.0371
Epoch 37/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 994ms/step - accuracy: 0.9831 - loss: 0.0415 - val_accuracy: 0.9531 - val_loss: 0.0972
Epoch 38/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 53s 989ms/step - accuracy: 0.9829 - loss: 0.0521 - val_accuracy: 0.9948 - val_loss: 0.0217
Epoch 39/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 1s/step - accuracy: 0.9934 - loss: 0.0205 - val_accuracy: 0.9688 - val_loss: 0.0840
Epoch 40/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 52s 965ms/step - accuracy: 0.9949 - loss: 0.0155 - val_accuracy: 0.9844 - val_loss: 0.0448
Epoch 41/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 52s 962ms/step - accuracy: 0.9898 - loss: 0.0276 - val_accuracy: 0.9688 - val_loss: 0.0992
Epoch 42/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 51s 950ms/step - accuracy: 0.9753 - loss: 0.0590 - val_accuracy: 0.9844 - val_loss: 0.0298
Epoch 43/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 57s 1s/step - accuracy: 0.9936 - loss: 0.0187 - val_accuracy: 0.9948 - val_loss: 0.0125
Epoch 44/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 56s 1s/step - accuracy: 0.9866 - loss: 0.0279 - val_accuracy: 0.9896 - val_loss: 0.0248
Epoch 45/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 52s 969ms/step - accuracy: 0.9974 - loss: 0.0115 - val_accuracy: 0.9948 - val_loss: 0.0092
Epoch 46/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 52s 954ms/step - accuracy: 0.9939 - loss: 0.0145 - val_accuracy: 0.9583 - val_loss: 0.1054
Epoch 47/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 52s 958ms/step - accuracy: 0.9869 - loss: 0.0374 - val_accuracy: 0.9896 - val_loss: 0.0155
Epoch 48/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 54s 1s/step - accuracy: 0.9933 - loss: 0.0274 - val_accuracy: 0.9635 - val_loss: 0.0929
Epoch 49/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 53s 988ms/step - accuracy: 0.9940 - loss: 0.0183 - val_accuracy: 0.9896 - val_loss: 0.0183
Epoch 50/50
54/54 ━━━━━━━━━━━━━━━━━━━━ 55s 1s/step - accuracy: 0.9941 - loss: 0.0136 - val_accuracy: 1.0000 - val_loss: 0.0044
In [30]:
scores = model.evaluate(test_ds)
8/8 ━━━━━━━━━━━━━━━━━━━━ 2s 249ms/step - accuracy: 1.0000 - loss: 0.0027
In [35]:
acc=history.history['accuracy']
val_acc=history.history['val_accuracy']

loss=history.history['loss']
val_loss=history.history['val_loss']
In [36]:
plt.figure(figsize=(8,8))
plt.subplot(1,2,1)
plt.plot(range(EPOCHS),acc,label='Training Accuracy')
plt.plot(range(EPOCHS),val_acc,label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validataion Accuracy')

plt.subplot(1,2,2)
plt.plot(range(EPOCHS),loss,label='Training loss')
plt.plot(range(EPOCHS),val_loss,label='Validation loss')
plt.legend(loc='lower right')
plt.title('Training and Validation Loss')
Out[36]:
Text(0.5, 1.0, 'Training and Validation Loss')
In [69]:
import numpy as np
for images_batch,labels_batch in test_ds.take(1):
    
    first_image = images_batch[0].numpy().astype('uint8')
    first_label = labels_batch[0].numpy()
    
    print("First image to predict")
    plt.imshow(first_image)
    print("Actual class of the image:",class_names[first_label])
    
    batch_prediction = model.predict(images_batch)
    predicted_label = np.argmax(batch_prediction[0])
    print("Predicted class of the image:",class_names[predicted_label])
    
    confidence = round(100*(np.max(batch_prediction[0])),2)
    print("Confidence:",confidence)
    
   
First image to predict
Actual class of the image: Potato___Early_blight
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 363ms/step
Predicted class of the image: Potato___Early_blight
Confidence: 100.0
In [73]:
def predict(model,img):
    img_array = tf.keras.preprocessing.image.img_to_array(images[i].numpy())
    img_array = tf.expand_dims(img_array,0)
    
    predictions = model.predict(img_array)
    
    predicted_class = class_names[np.argmax(predictions[0])]
    confidence= round(100*(np.max(predictions[0])))
    return predicted_class,confidence
    
In [80]:
plt.figure(figsize=(20,20))
for images,labels in test_ds.take(1):
    for i in range(9):
        axis=plt.subplot(3,3,i+1)
        plt.imshow(images[i].numpy().astype('uint8'))
        
        predicted_class,confidence = predict(model,images[i].numpy())
        actual_class=class_names[labels[i]]
        plt.title(f"Actual:{actual_class},\nPredicted:{predicted_class}\nConfidence:{confidence}%")
        
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 57ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 39ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 41ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 43ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 52ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 40ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 46ms/step
1/1 ━━━━━━━━━━━━━━━━━━━━ 0s 54ms/step
In [ ]: